In [1]:
# load Python modules
from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
from terminaltables import AsciiTable
np.random.seed(0)
%matplotlib inline
In [2]:
# loading custom inet modules
from inet import DataLoader, __version__
from inet.motifs import iicounter, motifcounter
from inet.utils import chem_squarematrix, elec_squarematrix
print('Inet version {}'.format(__version__))
In [3]:
# use the dataset to create the null hypothesis
mydataset = DataLoader('../data/PV')
In [4]:
# e.g. mydataset.IN[2].values() returns the different configurations with 2 PV cells
# count the number of recordings for each number of simultaneously recorded PV cells
nPV = [np.sum(mydataset.IN[i].values()) for i in range(9)]
In [5]:
for i, n_recordings in enumerate(nPV):
    print('{:<3d} recordings with {:2d} PV-cells'.format(n_recordings, i))
In [5]:
# for the moment, we only count experiments with 2 or 3 PV cells
# later we will use all of mydataset.IN[2:]
PV2 = sum(mydataset.IN[2].values())
PV3 = sum(mydataset.IN[3].values())
PV2, PV3
Out[5]:
In [10]:
PC = mydataset.motif.ii_chem_found/mydataset.motif.ii_chem_tested
PE = mydataset.motif.ii_elec_found/mydataset.motif.ii_elec_tested
PC2 = mydataset.motif.ii_c2_found/mydataset.motif.ii_c2_tested
Pdiv = mydataset.motif.ii_div_found/mydataset.motif.ii_div_tested
Pcon = mydataset.motif.ii_con_found/mydataset.motif.ii_con_tested
Plin = mydataset.motif.ii_lin_found/mydataset.motif.ii_lin_tested
PC1E = mydataset.motif.ii_c1e_found/mydataset.motif.ii_c1e_tested
PC2E = mydataset.motif.ii_c2e_found/mydataset.motif.ii_c2e_tested
info = [
    ['key', 'Probability', 'Motif', 'Value'],
    ['ii_chem', 'P(C)', 'chemical synapse', PC],
    ['ii_elec', 'P(E)', 'electrical synapse', PE],
    ['', '', '', ''],
    ['ii_c2', 'P(C & C)', 'bidirectional chemical synapse', PC2],
    ['ii_con', 'P(con)', 'convergent inhibitory motifs', Pcon],
    ['ii_div', 'P(div)', 'divergent inhibitory motifs', Pdiv],
    ['ii_lin', 'P(lin)', 'linear inhibitory motifs', Plin],
    ['', '', '', ''],
    ['ii_c1e', 'P(C & E)', 'electrical and unidirectional chemical', PC1E],
    ['ii_c2e', 'P(2C & E)', 'electrical and bidirectional chemical', PC2E],
]
print(AsciiTable(info).table)
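For reference, the same probabilities could be collected in a single pass over the motif keys listed in the table; this is only a sketch, assuming mydataset.motif exposes all these keys with 'found' and 'tested' entries, as the dict-style access in the cells below suggests.
keys = ['ii_chem', 'ii_elec', 'ii_c2', 'ii_con', 'ii_div', 'ii_lin', 'ii_c1e', 'ii_c2e']
# probability of each motif = number of motifs found / number of motifs tested
probs = {k: mydataset.motif[k]['found'] / mydataset.motif[k]['tested'] for k in keys}
print(probs)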
In [11]:
def mychem_simulation():
    """
    Simulate the inhibitory chemical connections of the dataset.

    Returns
    -------
    An iicounter object
    """
    mycount = iicounter()
    for _ in range(PV2):
        mycount += iicounter(chem_squarematrix(size=2, prob=PC))
    for _ in range(PV3):
        mycount += iicounter(chem_squarematrix(size=3, prob=PC))
    return mycount
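To make explicit what the simulation assumes, here is a minimal sketch of the kind of matrix chem_squarematrix is expected to return: every off-diagonal entry (a possible directed connection) is an independent Bernoulli draw with probability prob, with 1 marking a chemical synapse. The name chem_squarematrix_sketch is hypothetical and the actual inet implementation may differ in detail.
def chem_squarematrix_sketch(size, prob):
    """Hypothetical stand-in for chem_squarematrix: random directed chemical connectivity."""
    m = (np.random.rand(size, size) < prob).astype(int)
    np.fill_diagonal(m, 0)  # no self-connections
    return m

# one random configuration of 3 PV cells with P(C) = 0.3
print(chem_squarematrix_sketch(size=3, prob=0.3))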
In [12]:
print(mychem_simulation())  # one simulation; check the number of connections tested
In [10]:
# must contain the same number of tested connections
for key in mychem_simulation().keys():
    print(key, mydataset.motif[key])
In [14]:
# simulate the whole data set 1,000 times
n_chem = list()
n_bichem = list()
n_div = list()
n_con = list()
n_chain = list()
for _ in range(1000):
    syn_counter = mychem_simulation()
    n_chem.append(syn_counter['ii_chem']['found'])  # null hypothesis
    n_bichem.append(syn_counter['ii_c2']['found'])
    n_con.append(syn_counter['ii_con']['found'])
    n_div.append(syn_counter['ii_div']['found'])
    n_chain.append(syn_counter['ii_lin']['found'])
If the null hypothesis is correctly implemented, we should see almost the same number of chemical synapses as in the experiments.
In [15]:
np.mean(n_chem) # on average the same number of unidirectional connections
Out[15]:
In [16]:
mydataset.motif['ii_chem']['found']
Out[16]:
If the number we find differs from the empirical one, we must revise our null hypothesis.
In [17]:
np.mean(n_bichem)  # do we get, on average, the same number of bidirectional connections?
Out[17]:
Define the null hypothesis analytically: under independence, the expected number of bidirectional motifs is P(C)·P(C) times the number of tested pairs.
In [18]:
PC*PC*mydataset.motif['ii_c2']['tested'] # null hypothesis
Out[18]:
In [19]:
mydataset.motif['ii_c2']['found'] # however, we found more empirically
Out[19]:
In [20]:
# the number of divergent motifs found in the simulation should be very similar to the calculated one
np.mean(n_div)
Out[20]:
In [21]:
PC*PC*mydataset.motif['ii_div']['tested'] # null hypothesis
Out[21]:
In [22]:
np.mean(n_con)
Out[22]:
In [23]:
PC*PC*mydataset.motif['ii_con']['tested'] # null hypothesis
Out[23]:
In [24]:
def myelec_simulation():
    """
    Simulate the inhibitory electrical connections of the dataset.

    Returns
    -------
    An iicounter object
    """
    mycount = iicounter()
    for _ in range(PV2):
        mycount += iicounter(elec_squarematrix(size=2, prob=PE))
    for _ in range(PV3):
        mycount += iicounter(elec_squarematrix(size=3, prob=PE))
    return mycount
In [25]:
print(myelec_simulation())  # one simulation; check the number of connections tested
In [26]:
# must contain the same number of tested connections
for key in myelec_simulation().keys():
    print(key, mydataset.motif[key])
In [27]:
n_elec = list()
for _ in range(1000):
    syn_elec = myelec_simulation()
    n_elec.append(syn_elec['ii_elec']['found'])
Similarly, we should see almost the same number of electrical connections as in the experiments.
In [28]:
np.mean(n_elec)
Out[28]:
In [29]:
mydataset.motif.ii_elec_found # voila!
Out[29]:
In [30]:
C = chem_squarematrix(size = 2, prob = PC)
E = elec_squarematrix(size = 2, prob = PE)
C + E  # where a chemical (1) and an electrical (2) synapse coincide, the sum encodes the combined motif (3)
Out[30]:
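To make the encoding explicit, here is a small worked example with hand-written matrices (not drawn from the dataset), assuming the convention above: 0 = no connection, 1 = chemical, 2 = electrical, 3 = chemical plus electrical.
C = np.array([[0, 1],
              [0, 0]])   # cell 0 makes a chemical synapse onto cell 1
E = np.array([[0, 2],
              [0, 0]])   # the same pair is also electrically coupled
print(C + E)             # entry (0, 1) becomes 3: the combined motif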
In [31]:
def myii_simulation():
    """
    Simulate the inhibitory electrical and chemical connections of the dataset.

    Returns
    -------
    An iicounter object
    """
    mycount = iicounter()
    for _ in range(PV2):
        C = chem_squarematrix(size=2, prob=PC)
        E = elec_squarematrix(size=2, prob=PE)
        S = C + E
        # where only an electrical synapse was drawn (entry == 2), check the
        # opposite direction: a lone chemical synapse there (entry == 1) is
        # merged into the combined motif (3) and the lone '2' entry is removed
        x, y = np.where(S == 2)
        mycoor = zip(y, x)
        for i, j in mycoor:
            if S[i, j] == 1:
                S[i, j] = 3
                S[j, i] = 0
        mycount += iicounter(S)
    for _ in range(PV3):
        C = chem_squarematrix(size=3, prob=PC)
        E = elec_squarematrix(size=3, prob=PE)
        S = C + E
        # same correction as above, now for triplets
        x, y = np.where(S == 2)
        mycoor = zip(y, x)
        for i, j in mycoor:
            if S[i, j] == 1:
                S[i, j] = 3
                S[j, i] = 0
        mycount += iicounter(S)
    return mycount
In [32]:
myii_simulation()  # one simulation; again, check the number of connections tested
Out[32]:
In [33]:
# must contain the same number of tested connections
for key in myii_simulation().keys():
    print(key, mydataset.motif[key])
In [35]:
# simulate the whole dataset 10,000 times
n_chem = list()
n_elec = list()
n_c1e = list()
n_c2e = list()
n_c2 = list()
n_div = list()
n_con = list()
n_chain = list()
for _ in range(10000):
    syn_counter = myii_simulation()
    n_chem.append(syn_counter['ii_chem']['found'])  # null hypothesis
    n_elec.append(syn_counter['ii_elec']['found'])  # null hypothesis
    n_c1e.append(syn_counter['ii_c1e']['found'])
    n_c2e.append(syn_counter['ii_c2e']['found'])
    n_c2.append(syn_counter['ii_c2']['found'])
    n_con.append(syn_counter['ii_con']['found'])
    n_div.append(syn_counter['ii_div']['found'])
    n_chain.append(syn_counter['ii_lin']['found'])  # linear chains are stored under 'ii_lin'
In [37]:
info = [
    ['Syn Motif', 'Simulations', 'Empirical'],
    ['chemical', np.mean(n_chem), mydataset.motif['ii_chem']['found']],
    ['electrical', np.mean(n_elec), mydataset.motif['ii_elec']['found']],
    ['', '', ''],
    ['2 chem', np.mean(n_c2), mydataset.motif['ii_c2']['found']],
    ['convergent', np.mean(n_con), mydataset.motif['ii_con']['found']],
    ['divergent', np.mean(n_div), mydataset.motif['ii_div']['found']],
    ['chains', np.mean(n_chain), mydataset.motif['ii_lin']['found']],
    ['', '', ''],
    ['1 chem + elec', np.mean(n_c1e), mydataset.motif['ii_c1e']['found']],
    ['2 chem + elec', np.mean(n_c2e), mydataset.motif['ii_c2e']['found']],
]
print(AsciiTable(info).table)
Let's see if the connections found correspond to the theoretical values for the complex motifs.
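As a shorthand for the comparisons below, the expected count under independence is just a product of connection probabilities times the number of tested instances. A small helper sketch (expected_count is a hypothetical name, reusing the dict-style access of mydataset.motif shown above):
def expected_count(p_null, key):
    """Expected number of motifs under the independence null hypothesis."""
    return p_null * mydataset.motif[key]['tested']

# e.g., an electrical synapse plus one chemical synapse under independence
print(expected_count(PC*PE, 'ii_c1e'), 'expected vs.', mydataset.motif['ii_c1e']['found'], 'found')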
In [38]:
mydataset.motif['ii_c1e']
Out[38]:
In [39]:
PCE1 = mydataset.motif.ii_c1e_found /mydataset.motif.ii_c1e_tested
PCE1
Out[39]:
In [40]:
# analytic definition of the null hypothesis
# if this value is close to the simulation, we accept the null hypothesis
(PC*PE)*mydataset.motif.ii_c1e_tested
Out[40]:
In [41]:
mydataset.motif['ii_c2e']
Out[41]:
In [42]:
PCE2 = mydataset.motif.ii_c2e_found /mydataset.motif.ii_c2e_tested
PCE2
Out[42]:
In [43]:
# analytic definition of the null hypothesis
# if this value is close to the simulation, we accept the null hypothesis
(PE*PC*PC)*mydataset.motif.ii_c2e_tested
Out[43]:
In [44]:
mydataset.motif['ii_elec']
Out[44]:
In [45]:
PE = mydataset.motif.ii_elec_found /mydataset.motif.ii_elec_tested
PE
Out[45]:
In [46]:
# analytic definition of the null hypothesis
# if this value is close to the simulation, we accept the null hypothesis
(PE)*mydataset.motif.ii_elec_tested
Out[46]:
In [47]:
mydataset.motif['ii_chem']
Out[47]:
In [48]:
PC = mydataset.motif.ii_chem_found /mydataset.motif.ii_chem_tested
PC
Out[48]:
In [49]:
# analytic definition of the null hypothesis
# if this value is close to the simulation, we accept the null hypothesis
(PC)*mydataset.motif.ii_chem_tested
Out[49]:
In [50]:
mydataset.motif['ii_c2']
Out[50]:
In [51]:
PC1 = mydataset.motif.ii_chem_found /mydataset.motif.ii_chem_tested
PC1
Out[51]:
In [52]:
# analytic definition of the null hypothesis
# if this value is close to the simulation, we accept the null hypothesis
(PC1*PC1)*mydataset.motif.ii_c2_tested
Out[52]:
In [53]:
# calculate alpha levels according to Zhao et al., 2001:
# P(motif) = P(C)^2 * (1 + alpha), hence alpha = P(motif)/P(C)^2 - 1
# use the empirical probability of the bidirectional motif (PC2, computed above)
alpha_rec = (PC2/(PC*PC)) - 1
print(alpha_rec)
In [54]:
mydataset.motif['ii_div']
Out[54]:
In [55]:
Pdiv = mydataset.motif.ii_div_found /mydataset.motif.ii_div_tested
Pdiv
Out[55]:
In [56]:
# analytic definition of the null hypothesis
# if this value is close to the simulation, we accept the null hypothesis
(PC*PC)*mydataset.motif.ii_div_tested
Out[56]:
In [57]:
# calculate alpha levels according to Zhao et al., 2001
alpha_div = (Pdiv/(PC*PC))-1
print(alpha_div)
In [58]:
mydataset.motif['ii_con']
Out[58]:
In [59]:
Pcon = mydataset.motif.ii_con_found / mydataset.motif.ii_con_tested
Pcon
Out[59]:
In [60]:
# analytic definition of the null hypothesis
# if this value is close to the simulation, we accept the null hypothesis
(PC*PC)*mydataset.motif.ii_con_tested
Out[60]:
In [61]:
# calculate alpha levels according to Zhao et al., 2001
alpha_con = (Pcon/(PC*PC))-1
print(alpha_con)
In [63]:
mydataset.motif['ii_lin']
Out[63]:
In [65]:
Pchain = mydataset.motif.ii_lin_found / mydataset.motif.ii_lin_tested
Pchain
Out[65]:
In [66]:
# analytic definition of the null hypothesis
# if this value is close to the simulation, we accept the null hypothesis
(PC*PC)*mydataset.motif.ii_lin_tested
Out[66]:
In [67]:
# calculate alpha levels according to Zhao et al., 2001
alpha_chain = (Pchain/(PC*PC))-1
print(alpha_chain)
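The four alpha values follow the same pattern, so they can be printed side by side; this only reuses the values computed above.
for name, a in [('reciprocal', alpha_rec), ('convergent', alpha_con),
                ('divergent', alpha_div), ('chain', alpha_chain)]:
    print('{:<12s} alpha = {:2.4f}'.format(name, a))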
In [68]:
# operate with NumPy arrays rather than with lists
n_chem = np.array(n_chem)
n_elec = np.array(n_elec)
n_c2 = np.array(n_c2)
n_con = np.array(n_con)
n_div = np.array(n_div)
n_chain = np.array(n_chain)
n_c1e = np.array(n_c1e)
n_c2e = np.array(n_c2e)
In [71]:
pii_chem = len(n_chem[n_chem>mydataset.motif.ii_chem_found]) / n_chem.size
pii_elec = len(n_elec[n_elec>mydataset.motif.ii_elec_found])/ n_elec.size
pii_c2 = len(n_c2[n_c2 > mydataset.motif.ii_c2_found])/ n_c2.size
pii_con = len(n_con[n_con < mydataset.motif.ii_con_found])/n_con.size # under-rep
pii_div = len(n_div[n_div > mydataset.motif.ii_div_found])/n_div.size
pii_chain = len(n_chain[n_chain < mydataset.motif.ii_lin_found])/n_chain.size # under-rep
pii_c1e = len(n_c1e[n_c1e > mydataset.motif.ii_c1e_found])/ n_c1e.size
pii_c2e = len(n_c2e[n_c2e > mydataset.motif.ii_c2e_found])/ n_c2e.size
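The same one-sided Monte Carlo p-values can also be written as a boolean mean; an equivalent sketch (mc_pvalue is a hypothetical helper name):
def mc_pvalue(simulated, observed, tail='greater'):
    """Fraction of simulated counts beyond the observed count."""
    simulated = np.asarray(simulated)
    if tail == 'greater':                      # over-representation
        return np.mean(simulated > observed)
    return np.mean(simulated < observed)       # under-representation

# equivalent to pii_chem above
print(mc_pvalue(n_chem, mydataset.motif.ii_chem_found))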
In [73]:
info = [
    ['Syn Motif', 'Simulations', 'Empirical', 'P(Simulations)', 'alpha'],
    ['chemical', np.mean(n_chem), mydataset.motif.ii_chem_found, pii_chem, ''],
    ['electrical', np.mean(n_elec), mydataset.motif.ii_elec_found, pii_elec, ''],
    ['', '', '', '', ''],
    ['2 chem bidirect', np.mean(n_c2), mydataset.motif.ii_c2_found, pii_c2, alpha_rec],
    ['convergent', np.mean(n_con), mydataset.motif.ii_con_found, pii_con, alpha_con],
    ['divergent', np.mean(n_div), mydataset.motif.ii_div_found, pii_div, alpha_div],
    ['chain', np.mean(n_chain), mydataset.motif.ii_lin_found, pii_chain, alpha_chain],
    ['', '', '', '', ''],
    ['1 chem + elec', np.mean(n_c1e), mydataset.motif.ii_c1e_found, pii_c1e, ''],
    ['2 chem + elec', np.mean(n_c2e), mydataset.motif.ii_c2e_found, pii_c2e, ''],
]
print(AsciiTable(info).table)
In [74]:
from inet.plots import barplot
In [77]:
# distribution of simulated counts under the null hypothesis vs. the empirical count
fig = figure()
ax = fig.add_subplot(111)
ax = barplot(simulation = n_chem, n_found = mydataset.motif.ii_chem_found, larger=1);
ax.set_title('Chemical synapses', size=20);
ax.set_ylim(ymax=40);
ax.tick_params(labelsize=20)
fig.savefig('ii_chem.pdf')
In [78]:
fig = figure()
ax = fig.add_subplot(111)
ax = barplot(simulation = n_elec, n_found = mydataset.motif.ii_elec_found, larger=1);
ax.set_title('Electrical synapses', size=20);
ax.set_ylim(ymax=40);
ax.tick_params(labelsize=20)
fig.savefig('ii_elec.pdf')
In [79]:
fig = figure()
ax = fig.add_subplot(111)
ax = barplot(simulation = n_c2, n_found = mydataset.motif.ii_c2_found, larger=1);
ax.set_title('Bidirectional chemical', size=20);
ax.set_ylim(ymax=10);
ax.tick_params(labelsize=20)
fig.savefig('ii_c2.pdf')
In [80]:
fig = figure()
ax = fig.add_subplot(111)
ax = barplot(simulation = n_con, n_found = mydataset.motif.ii_con_found, larger=False);
ax.set_title('Convergent inhibitory', size=20);
ax.set_ylim(ymin=0, ymax=4);
ax.tick_params(labelsize=20)
fig.savefig('ii_con.pdf')
In [81]:
fig = figure()
ax = fig.add_subplot(111)
ax = barplot(simulation = n_div, n_found = mydataset.motif.ii_div_found, larger=1);
ax.set_title('Divergent inhibitory', size=20);
ax.set_ylim(ymin=0, ymax=5);
ax.tick_params(labelsize=20)
fig.savefig('ii_div.pdf')
In [83]:
fig = figure()
ax = fig.add_subplot(111)
ax = barplot(simulation = n_chain, n_found = mydataset.motif.ii_lin_found, larger=False);
ax.set_title('Linear chains', size=20);
ax.set_ylim(ymin=0, ymax=10);
ax.tick_params(labelsize=20)
fig.savefig('ii_chain.pdf')
#pii_chain # change this value in the plot!
In [84]:
fig = figure()
ax = fig.add_subplot(111)
ax = barplot(simulation = n_c1e, n_found = mydataset.motif.ii_c1e_found, larger=1);
ax.set_title('Electrical and one chemical', size=20);
ax.set_ylim(ymax=25);
ax.tick_params(labelsize=20)
fig.savefig('ii_c1e.pdf')
In [85]:
fig = figure()
ax = fig.add_subplot(111)
ax = barplot(simulation = n_c2e, n_found = mydataset.motif.ii_c2e_found, larger=1);
ax.set_title('Electrical and two chemical', size=20);
ax.set_ylim(ymin = 0, ymax=10);
ax.tick_params(labelsize=20)
fig.savefig('ii_c2e.pdf')
In [ ]: